// Source file: alloc/collections/binary_heap/mod.rs
1//! A priority queue implemented with a binary heap.
2//!
3//! Insertion and popping the largest element have *O*(log(*n*)) time complexity.
4//! Checking the largest element is *O*(1). Converting a vector to a binary heap
5//! can be done in-place, and has *O*(*n*) complexity. A binary heap can also be
6//! converted to a sorted vector in-place, allowing it to be used for an *O*(*n* * log(*n*))
7//! in-place heapsort.
8//!
9//! # Examples
10//!
11//! This is a larger example that implements [Dijkstra's algorithm][dijkstra]
12//! to solve the [shortest path problem][sssp] on a [directed graph][dir_graph].
13//! It shows how to use [`BinaryHeap`] with custom types.
14//!
15//! [dijkstra]: https://en.wikipedia.org/wiki/Dijkstra%27s_algorithm
16//! [sssp]: https://en.wikipedia.org/wiki/Shortest_path_problem
17//! [dir_graph]: https://en.wikipedia.org/wiki/Directed_graph
18//!
19//! ```
20//! use std::cmp::Ordering;
21//! use std::collections::BinaryHeap;
22//!
23//! #[derive(Copy, Clone, Eq, PartialEq)]
24//! struct State {
25//!     cost: usize,
26//!     position: usize,
27//! }
28//!
29//! // The priority queue depends on `Ord`.
30//! // Explicitly implement the trait so the queue becomes a min-heap
31//! // instead of a max-heap.
32//! impl Ord for State {
33//!     fn cmp(&self, other: &Self) -> Ordering {
34//!         // Notice that we flip the ordering on costs.
35//!         // In case of a tie we compare positions - this step is necessary
36//!         // to make implementations of `PartialEq` and `Ord` consistent.
37//!         other.cost.cmp(&self.cost)
38//!             .then_with(|| self.position.cmp(&other.position))
39//!     }
40//! }
41//!
42//! // `PartialOrd` needs to be implemented as well.
43//! impl PartialOrd for State {
44//!     fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
45//!         Some(self.cmp(other))
46//!     }
47//! }
48//!
49//! // Each node is represented as a `usize`, for a shorter implementation.
50//! struct Edge {
51//!     node: usize,
52//!     cost: usize,
53//! }
54//!
55//! // Dijkstra's shortest path algorithm.
56//!
57//! // Start at `start` and use `dist` to track the current shortest distance
58//! // to each node. This implementation isn't memory-efficient as it may leave duplicate
59//! // nodes in the queue. It also uses `usize::MAX` as a sentinel value,
60//! // for a simpler implementation.
61//! fn shortest_path(adj_list: &Vec<Vec<Edge>>, start: usize, goal: usize) -> Option<usize> {
62//!     // dist[node] = current shortest distance from `start` to `node`
63//!     let mut dist: Vec<_> = (0..adj_list.len()).map(|_| usize::MAX).collect();
64//!
65//!     let mut heap = BinaryHeap::new();
66//!
67//!     // We're at `start`, with a zero cost
68//!     dist[start] = 0;
69//!     heap.push(State { cost: 0, position: start });
70//!
71//!     // Examine the frontier with lower cost nodes first (min-heap)
72//!     while let Some(State { cost, position }) = heap.pop() {
73//!         // Alternatively we could have continued to find all shortest paths
74//!         if position == goal { return Some(cost); }
75//!
76//!         // Important as we may have already found a better way
77//!         if cost > dist[position] { continue; }
78//!
79//!         // For each node we can reach, see if we can find a way with
80//!         // a lower cost going through this node
81//!         for edge in &adj_list[position] {
82//!             let next = State { cost: cost + edge.cost, position: edge.node };
83//!
84//!             // If so, add it to the frontier and continue
85//!             if next.cost < dist[next.position] {
86//!                 heap.push(next);
87//!                 // Relaxation, we have now found a better way
88//!                 dist[next.position] = next.cost;
89//!             }
90//!         }
91//!     }
92//!
93//!     // Goal not reachable
94//!     None
95//! }
96//!
97//! fn main() {
98//!     // This is the directed graph we're going to use.
99//!     // The node numbers correspond to the different states,
100//!     // and the edge weights symbolize the cost of moving
101//!     // from one node to another.
102//!     // Note that the edges are one-way.
103//!     //
104//!     //                  7
105//!     //          +-----------------+
106//!     //          |                 |
107//!     //          v   1        2    |  2
108//!     //          0 -----> 1 -----> 3 ---> 4
109//!     //          |        ^        ^      ^
110//!     //          |        | 1      |      |
111//!     //          |        |        | 3    | 1
112//!     //          +------> 2 -------+      |
113//!     //           10      |               |
114//!     //                   +---------------+
115//!     //
116//!     // The graph is represented as an adjacency list where each index,
117//!     // corresponding to a node value, has a list of outgoing edges.
118//!     // Chosen for its efficiency.
119//!     let graph = vec![
120//!         // Node 0
121//!         vec![Edge { node: 2, cost: 10 },
122//!              Edge { node: 1, cost: 1 }],
123//!         // Node 1
124//!         vec![Edge { node: 3, cost: 2 }],
125//!         // Node 2
126//!         vec![Edge { node: 1, cost: 1 },
127//!              Edge { node: 3, cost: 3 },
128//!              Edge { node: 4, cost: 1 }],
129//!         // Node 3
130//!         vec![Edge { node: 0, cost: 7 },
131//!              Edge { node: 4, cost: 2 }],
132//!         // Node 4
133//!         vec![]];
134//!
135//!     assert_eq!(shortest_path(&graph, 0, 1), Some(1));
136//!     assert_eq!(shortest_path(&graph, 0, 3), Some(3));
137//!     assert_eq!(shortest_path(&graph, 3, 0), Some(7));
138//!     assert_eq!(shortest_path(&graph, 0, 4), Some(5));
139//!     assert_eq!(shortest_path(&graph, 4, 0), None);
140//! }
141//! ```
142
143#![allow(missing_docs)]
144#![stable(feature = "rust1", since = "1.0.0")]
145
146use core::alloc::Allocator;
147use core::iter::{FusedIterator, InPlaceIterable, SourceIter, TrustedFused, TrustedLen};
148use core::mem::{self, ManuallyDrop, swap};
149use core::num::NonZero;
150use core::ops::{Deref, DerefMut};
151use core::{fmt, ptr};
152
153use crate::alloc::Global;
154use crate::collections::TryReserveError;
155use crate::slice;
156#[cfg(not(test))]
157use crate::vec::AsVecIntoIter;
158use crate::vec::{self, Vec};
159
160/// A priority queue implemented with a binary heap.
161///
162/// This will be a max-heap.
163///
164/// It is a logic error for an item to be modified in such a way that the
165/// item's ordering relative to any other item, as determined by the [`Ord`]
166/// trait, changes while it is in the heap. This is normally only possible
167/// through interior mutability, global state, I/O, or unsafe code. The
168/// behavior resulting from such a logic error is not specified, but will
169/// be encapsulated to the `BinaryHeap` that observed the logic error and not
170/// result in undefined behavior. This could include panics, incorrect results,
171/// aborts, memory leaks, and non-termination.
172///
173/// As long as no elements change their relative order while being in the heap
174/// as described above, the API of `BinaryHeap` guarantees that the heap
175/// invariant remains intact i.e. its methods all behave as documented. For
176/// example if a method is documented as iterating in sorted order, that's
177/// guaranteed to work as long as elements in the heap have not changed order,
178/// even in the presence of closures getting unwinded out of, iterators getting
179/// leaked, and similar foolishness.
180///
181/// # Examples
182///
183/// ```
184/// use std::collections::BinaryHeap;
185///
186/// // Type inference lets us omit an explicit type signature (which
187/// // would be `BinaryHeap<i32>` in this example).
188/// let mut heap = BinaryHeap::new();
189///
190/// // We can use peek to look at the next item in the heap. In this case,
191/// // there's no items in there yet so we get None.
192/// assert_eq!(heap.peek(), None);
193///
194/// // Let's add some scores...
195/// heap.push(1);
196/// heap.push(5);
197/// heap.push(2);
198///
199/// // Now peek shows the most important item in the heap.
200/// assert_eq!(heap.peek(), Some(&5));
201///
202/// // We can check the length of a heap.
203/// assert_eq!(heap.len(), 3);
204///
205/// // We can iterate over the items in the heap, although they are returned in
206/// // a random order.
207/// for x in &heap {
208///     println!("{x}");
209/// }
210///
211/// // If we instead pop these scores, they should come back in order.
212/// assert_eq!(heap.pop(), Some(5));
213/// assert_eq!(heap.pop(), Some(2));
214/// assert_eq!(heap.pop(), Some(1));
215/// assert_eq!(heap.pop(), None);
216///
217/// // We can clear the heap of any remaining items.
218/// heap.clear();
219///
220/// // The heap should now be empty.
221/// assert!(heap.is_empty())
222/// ```
223///
224/// A `BinaryHeap` with a known list of items can be initialized from an array:
225///
226/// ```
227/// use std::collections::BinaryHeap;
228///
229/// let heap = BinaryHeap::from([1, 5, 2]);
230/// ```
231///
232/// ## Min-heap
233///
234/// Either [`core::cmp::Reverse`] or a custom [`Ord`] implementation can be used to
235/// make `BinaryHeap` a min-heap. This makes `heap.pop()` return the smallest
236/// value instead of the greatest one.
237///
238/// ```
239/// use std::collections::BinaryHeap;
240/// use std::cmp::Reverse;
241///
242/// let mut heap = BinaryHeap::new();
243///
244/// // Wrap values in `Reverse`
245/// heap.push(Reverse(1));
246/// heap.push(Reverse(5));
247/// heap.push(Reverse(2));
248///
249/// // If we pop these scores now, they should come back in the reverse order.
250/// assert_eq!(heap.pop(), Some(Reverse(1)));
251/// assert_eq!(heap.pop(), Some(Reverse(2)));
252/// assert_eq!(heap.pop(), Some(Reverse(5)));
253/// assert_eq!(heap.pop(), None);
254/// ```
255///
256/// # Time complexity
257///
258/// | [push]  | [pop]         | [peek]/[peek\_mut] |
259/// |---------|---------------|--------------------|
260/// | *O*(1)~ | *O*(log(*n*)) | *O*(1)             |
261///
262/// The value for `push` is an expected cost; the method documentation gives a
263/// more detailed analysis.
264///
265/// [`core::cmp::Reverse`]: core::cmp::Reverse
266/// [`Cell`]: core::cell::Cell
267/// [`RefCell`]: core::cell::RefCell
268/// [push]: BinaryHeap::push
269/// [pop]: BinaryHeap::pop
270/// [peek]: BinaryHeap::peek
271/// [peek\_mut]: BinaryHeap::peek_mut
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg_attr(not(test), rustc_diagnostic_item = "BinaryHeap")]
pub struct BinaryHeap<
    T,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    // Invariant: `data` is arranged as a binary max-heap, i.e. for every
    // index `0 < i < data.len()`, `data[(i - 1) / 2] >= data[i]`.
    data: Vec<T, A>,
}
280
281/// Structure wrapping a mutable reference to the greatest item on a
282/// `BinaryHeap`.
283///
284/// This `struct` is created by the [`peek_mut`] method on [`BinaryHeap`]. See
285/// its documentation for more.
286///
287/// [`peek_mut`]: BinaryHeap::peek_mut
#[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
pub struct PeekMut<
    'a,
    T: 'a + Ord,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    // The heap being peeked into; guaranteed non-empty for as long as this
    // guard exists (see `BinaryHeap::peek_mut`).
    heap: &'a mut BinaryHeap<T, A>,
    // If a set_len + sift_down are required, this is Some. If a &mut T has not
    // yet been exposed to peek_mut()'s caller, it's None.
    original_len: Option<NonZero<usize>>,
}
299
300#[stable(feature = "collection_debug", since = "1.17.0")]
301impl<T: Ord + fmt::Debug, A: Allocator> fmt::Debug for PeekMut<'_, T, A> {
302    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
303        f.debug_tuple("PeekMut").field(&self.heap.data[0]).finish()
304    }
305}
306
#[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
impl<T: Ord, A: Allocator> Drop for PeekMut<'_, T, A> {
    fn drop(&mut self) {
        // `original_len` is Some only if `deref_mut` handed out a `&mut T`
        // (and leak-amplified the vector); in that case the element may have
        // been mutated, so the heap property must be re-established.
        if let Some(original_len) = self.original_len {
            // SAFETY: That's how many elements were in the Vec at the time of
            // the PeekMut::deref_mut call, and therefore also at the time of
            // the BinaryHeap::peek_mut call. Since the PeekMut did not end up
            // getting leaked, we are now undoing the leak amplification that
            // the DerefMut prepared for.
            unsafe { self.heap.data.set_len(original_len.get()) };

            // SAFETY: PeekMut is only instantiated for non-empty heaps.
            unsafe { self.heap.sift_down(0) };
        }
    }
}
323
#[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
impl<T: Ord, A: Allocator> Deref for PeekMut<'_, T, A> {
    type Target = T;
    fn deref(&self) -> &T {
        debug_assert!(!self.heap.is_empty());
        // SAFETY: PeekMut is only instantiated for non-empty heaps, so
        // index 0 is always in bounds.
        unsafe { self.heap.data.get_unchecked(0) }
    }
}
333
#[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
impl<T: Ord, A: Allocator> DerefMut for PeekMut<'_, T, A> {
    fn deref_mut(&mut self) -> &mut T {
        debug_assert!(!self.heap.is_empty());

        let len = self.heap.len();
        if len > 1 {
            // Here we preemptively leak all the rest of the underlying vector
            // after the currently max element. If the caller mutates the &mut T
            // we're about to give them, and then leaks the PeekMut, all these
            // elements will remain leaked. If they don't leak the PeekMut, then
            // either Drop or PeekMut::pop will un-leak the vector elements.
            //
            // This technique is described throughout several other places in
            // the standard library as "leak amplification".
            unsafe {
                // SAFETY: len > 1 so len != 0.
                self.original_len = Some(NonZero::new_unchecked(len));
                // SAFETY: len > 1 so all this does for now is leak elements,
                // which is safe.
                self.heap.data.set_len(1);
            }
        }

        // SAFETY: PeekMut is only instantiated for non-empty heaps, so
        // index 0 is always in bounds.
        unsafe { self.heap.data.get_unchecked_mut(0) }
    }
}
362
impl<'a, T: Ord, A: Allocator> PeekMut<'a, T, A> {
    /// Sifts the current element to its new position.
    ///
    /// Afterwards refers to the new element. Returns whether the element changed.
    ///
    /// ## Examples
    ///
    /// The condition can be used to upper bound all elements in the heap. When only few elements
    /// are affected, the heap's sort ensures this is faster than a reconstruction from the raw
    /// element list and requires no additional allocation.
    ///
    /// ```
    /// #![feature(binary_heap_peek_mut_refresh)]
    /// use std::collections::BinaryHeap;
    ///
    /// let mut heap: BinaryHeap<u32> = (0..128).collect();
    /// let mut peek = heap.peek_mut().unwrap();
    ///
    /// loop {
    ///     *peek = 99;
    ///
    ///     if !peek.refresh() {
    ///         break;
    ///     }
    /// }
    ///
    /// // Post condition, this is now an upper bound.
    /// assert!(*peek < 100);
    /// ```
    ///
    /// When the element remains the maximum after modification, the peek remains unchanged:
    ///
    /// ```
    /// #![feature(binary_heap_peek_mut_refresh)]
    /// use std::collections::BinaryHeap;
    ///
    /// let mut heap: BinaryHeap<u32> = [1, 2, 3].into();
    /// let mut peek = heap.peek_mut().unwrap();
    ///
    /// assert_eq!(*peek, 3);
    /// *peek = 42;
    ///
    /// // Refreshing reports no change: 42 is still the maximum.
    /// assert!(!peek.refresh(), "42 is even larger than 3");
    /// assert_eq!(*peek, 42);
    /// ```
    #[unstable(feature = "binary_heap_peek_mut_refresh", issue = "138355")]
    #[must_use = "is equivalent to dropping and getting a new PeekMut except for return information"]
    pub fn refresh(&mut self) -> bool {
        // The length of the underlying heap is unchanged by sifting down. The value stored for leak
        // amplification thus remains accurate. We erase the leak amplification firstly because the
        // operation is then equivalent to constructing a new PeekMut and secondly this avoids any
        // future complication where original_len being non-empty would be interpreted as the heap
        // having been leak amplified instead of checking the heap itself.
        if let Some(original_len) = self.original_len.take() {
            // SAFETY: This is how many elements were in the Vec at the time of
            // the BinaryHeap::peek_mut call.
            unsafe { self.heap.data.set_len(original_len.get()) };

            // The length of the heap did not change by sifting, upholding our own invariants.

            // SAFETY: PeekMut is only instantiated for non-empty heaps.
            // A non-zero return position means the element moved, i.e. it changed.
            (unsafe { self.heap.sift_down(0) }) != 0
        } else {
            // The element was not modified.
            false
        }
    }

    /// Removes the peeked value from the heap and returns it.
    #[stable(feature = "binary_heap_peek_mut_pop", since = "1.18.0")]
    pub fn pop(mut this: PeekMut<'a, T, A>) -> T {
        if let Some(original_len) = this.original_len.take() {
            // SAFETY: This is how many elements were in the Vec at the time of
            // the BinaryHeap::peek_mut call.
            unsafe { this.heap.data.set_len(original_len.get()) };

            // Unlike in Drop, here we don't also need to do a sift_down even if
            // the caller could've mutated the element. It is removed from the
            // heap on the next line and pop() is not sensitive to its value.
        }

        // SAFETY: Having a `PeekMut` proves that the associated binary heap is
        // non-empty, so the `pop` operation will not fail.
        unsafe { this.heap.pop().unwrap_unchecked() }
    }
}
450
451#[stable(feature = "rust1", since = "1.0.0")]
452impl<T: Clone, A: Allocator + Clone> Clone for BinaryHeap<T, A> {
453    fn clone(&self) -> Self {
454        BinaryHeap { data: self.data.clone() }
455    }
456
457    /// Overwrites the contents of `self` with a clone of the contents of `source`.
458    ///
459    /// This method is preferred over simply assigning `source.clone()` to `self`,
460    /// as it avoids reallocation if possible.
461    ///
462    /// See [`Vec::clone_from()`] for more details.
463    fn clone_from(&mut self, source: &Self) {
464        self.data.clone_from(&source.data);
465    }
466}
467
468#[stable(feature = "rust1", since = "1.0.0")]
469impl<T> Default for BinaryHeap<T> {
470    /// Creates an empty `BinaryHeap<T>`.
471    #[inline]
472    fn default() -> BinaryHeap<T> {
473        BinaryHeap::new()
474    }
475}
476
477#[stable(feature = "binaryheap_debug", since = "1.4.0")]
478impl<T: fmt::Debug, A: Allocator> fmt::Debug for BinaryHeap<T, A> {
479    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
480        f.debug_list().entries(self.iter()).finish()
481    }
482}
483
/// Panic-safety guard: when dropped, calls `rebuild_tail(rebuild_from)` on the
/// wrapped heap so the heap property is re-established over the tail even if
/// the guarded operation unwinds.
struct RebuildOnDrop<
    'a,
    T: Ord,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    heap: &'a mut BinaryHeap<T, A>,
    // Index from which the heap property must be rebuilt on drop.
    rebuild_from: usize,
}
492
impl<T: Ord, A: Allocator> Drop for RebuildOnDrop<'_, T, A> {
    fn drop(&mut self) {
        // Runs on both normal exit and unwind, restoring the heap invariant
        // for the elements at and after `rebuild_from`.
        self.heap.rebuild_tail(self.rebuild_from);
    }
}
498
499impl<T> BinaryHeap<T> {
500    /// Creates an empty `BinaryHeap` as a max-heap.
501    ///
502    /// # Examples
503    ///
504    /// Basic usage:
505    ///
506    /// ```
507    /// use std::collections::BinaryHeap;
508    /// let mut heap = BinaryHeap::new();
509    /// heap.push(4);
510    /// ```
511    #[stable(feature = "rust1", since = "1.0.0")]
512    #[rustc_const_stable(feature = "const_binary_heap_constructor", since = "1.80.0")]
513    #[must_use]
514    pub const fn new() -> BinaryHeap<T> {
515        BinaryHeap { data: vec![] }
516    }
517
518    /// Creates an empty `BinaryHeap` with at least the specified capacity.
519    ///
520    /// The binary heap will be able to hold at least `capacity` elements without
521    /// reallocating. This method is allowed to allocate for more elements than
522    /// `capacity`. If `capacity` is zero, the binary heap will not allocate.
523    ///
524    /// # Examples
525    ///
526    /// Basic usage:
527    ///
528    /// ```
529    /// use std::collections::BinaryHeap;
530    /// let mut heap = BinaryHeap::with_capacity(10);
531    /// heap.push(4);
532    /// ```
533    #[stable(feature = "rust1", since = "1.0.0")]
534    #[must_use]
535    pub fn with_capacity(capacity: usize) -> BinaryHeap<T> {
536        BinaryHeap { data: Vec::with_capacity(capacity) }
537    }
538}
539
impl<T, A: Allocator> BinaryHeap<T, A> {
    /// Creates an empty `BinaryHeap` as a max-heap, using `A` as allocator.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::alloc::System;
    /// use std::collections::BinaryHeap;
    /// let mut heap = BinaryHeap::new_in(System);
    /// heap.push(4);
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[must_use]
    pub const fn new_in(alloc: A) -> BinaryHeap<T, A> {
        BinaryHeap { data: Vec::new_in(alloc) }
    }

    /// Creates an empty `BinaryHeap` with at least the specified capacity, using `A` as allocator.
    ///
    /// The binary heap will be able to hold at least `capacity` elements without
    /// reallocating. This method is allowed to allocate for more elements than
    /// `capacity`. If `capacity` is zero, the binary heap will not allocate.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::alloc::System;
    /// use std::collections::BinaryHeap;
    /// let mut heap = BinaryHeap::with_capacity_in(10, System);
    /// heap.push(4);
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[must_use]
    pub fn with_capacity_in(capacity: usize, alloc: A) -> BinaryHeap<T, A> {
        BinaryHeap { data: Vec::with_capacity_in(capacity, alloc) }
    }

    /// Creates a `BinaryHeap` using the supplied `vec`. This does not rebuild the heap,
    /// so `vec` must already be a max-heap.
    ///
    /// # Safety
    ///
    /// The supplied `vec` must be a max-heap, i.e. for all indices `0 < i < vec.len()`,
    /// `vec[(i - 1) / 2] >= vec[i]`.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// #![feature(binary_heap_from_raw_vec)]
    ///
    /// use std::collections::BinaryHeap;
    /// let heap = BinaryHeap::from([1, 2, 3]);
    /// let vec = heap.into_vec();
    ///
    /// // Safety: vec is the output of heap.into_vec(), so is a max-heap.
    /// let mut new_heap = unsafe {
    ///     BinaryHeap::from_raw_vec(vec)
    /// };
    /// assert_eq!(new_heap.pop(), Some(3));
    /// assert_eq!(new_heap.pop(), Some(2));
    /// assert_eq!(new_heap.pop(), Some(1));
    /// assert_eq!(new_heap.pop(), None);
    /// ```
    #[unstable(feature = "binary_heap_from_raw_vec", issue = "152500")]
    #[must_use]
    pub unsafe fn from_raw_vec(vec: Vec<T, A>) -> BinaryHeap<T, A> {
        BinaryHeap { data: vec }
    }
}
619
620impl<T: Ord, A: Allocator> BinaryHeap<T, A> {
621    /// Returns a mutable reference to the greatest item in the binary heap, or
622    /// `None` if it is empty.
623    ///
624    /// Note: If the `PeekMut` value is leaked, some heap elements might get
625    /// leaked along with it, but the remaining elements will remain a valid
626    /// heap.
627    ///
628    /// # Examples
629    ///
630    /// Basic usage:
631    ///
632    /// ```
633    /// use std::collections::BinaryHeap;
634    /// let mut heap = BinaryHeap::new();
635    /// assert!(heap.peek_mut().is_none());
636    ///
637    /// heap.push(1);
638    /// heap.push(5);
639    /// heap.push(2);
640    /// if let Some(mut val) = heap.peek_mut() {
641    ///     *val = 0;
642    /// }
643    /// assert_eq!(heap.peek(), Some(&2));
644    /// ```
645    ///
646    /// # Time complexity
647    ///
648    /// If the item is modified then the worst case time complexity is *O*(log(*n*)),
649    /// otherwise it's *O*(1).
650    #[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
651    pub fn peek_mut(&mut self) -> Option<PeekMut<'_, T, A>> {
652        if self.is_empty() { None } else { Some(PeekMut { heap: self, original_len: None }) }
653    }
654
655    /// Removes the greatest item from the binary heap and returns it, or `None` if it
656    /// is empty.
657    ///
658    /// # Examples
659    ///
660    /// Basic usage:
661    ///
662    /// ```
663    /// use std::collections::BinaryHeap;
664    /// let mut heap = BinaryHeap::from([1, 3]);
665    ///
666    /// assert_eq!(heap.pop(), Some(3));
667    /// assert_eq!(heap.pop(), Some(1));
668    /// assert_eq!(heap.pop(), None);
669    /// ```
670    ///
671    /// # Time complexity
672    ///
673    /// The worst case cost of `pop` on a heap containing *n* elements is *O*(log(*n*)).
674    #[stable(feature = "rust1", since = "1.0.0")]
675    pub fn pop(&mut self) -> Option<T> {
676        self.data.pop().map(|mut item| {
677            if !self.is_empty() {
678                swap(&mut item, &mut self.data[0]);
679                // SAFETY: !self.is_empty() means that self.len() > 0
680                unsafe { self.sift_down_to_bottom(0) };
681            }
682            item
683        })
684    }
685
686    /// Removes and returns the greatest item from the binary heap if the predicate
687    /// returns `true`, or [`None`] if the predicate returns false or the heap
688    /// is empty (the predicate will not be called in that case).
689    ///
690    /// # Examples
691    ///
692    /// ```
693    /// #![feature(binary_heap_pop_if)]
694    /// use std::collections::BinaryHeap;
695    /// let mut heap = BinaryHeap::from([1, 2]);
696    /// let pred = |x: &i32| *x % 2 == 0;
697    ///
698    /// assert_eq!(heap.pop_if(pred), Some(2));
699    /// assert_eq!(heap.as_slice(), [1]);
700    /// assert_eq!(heap.pop_if(pred), None);
701    /// assert_eq!(heap.as_slice(), [1]);
702    /// ```
703    ///
704    /// # Time complexity
705    ///
706    /// The worst case cost of `pop_if` on a heap containing *n* elements is *O*(log(*n*)).
707    #[unstable(feature = "binary_heap_pop_if", issue = "151828")]
708    pub fn pop_if(&mut self, predicate: impl FnOnce(&T) -> bool) -> Option<T> {
709        let first = self.peek()?;
710        if predicate(first) { self.pop() } else { None }
711    }
712
713    /// Pushes an item onto the binary heap.
714    ///
715    /// # Examples
716    ///
717    /// Basic usage:
718    ///
719    /// ```
720    /// use std::collections::BinaryHeap;
721    /// let mut heap = BinaryHeap::new();
722    /// heap.push(3);
723    /// heap.push(5);
724    /// heap.push(1);
725    ///
726    /// assert_eq!(heap.len(), 3);
727    /// assert_eq!(heap.peek(), Some(&5));
728    /// ```
729    ///
730    /// # Time complexity
731    ///
732    /// The expected cost of `push`, averaged over every possible ordering of
733    /// the elements being pushed, and over a sufficiently large number of
734    /// pushes, is *O*(1). This is the most meaningful cost metric when pushing
735    /// elements that are *not* already in any sorted pattern.
736    ///
737    /// The time complexity degrades if elements are pushed in predominantly
738    /// ascending order. In the worst case, elements are pushed in ascending
739    /// sorted order and the amortized cost per push is *O*(log(*n*)) against a heap
740    /// containing *n* elements.
741    ///
742    /// The worst case cost of a *single* call to `push` is *O*(*n*). The worst case
743    /// occurs when capacity is exhausted and needs a resize. The resize cost
744    /// has been amortized in the previous figures.
745    #[stable(feature = "rust1", since = "1.0.0")]
746    #[rustc_confusables("append", "put")]
747    pub fn push(&mut self, item: T) {
748        let old_len = self.len();
749        self.data.push(item);
750        // SAFETY: Since we pushed a new item it means that
751        //  old_len = self.len() - 1 < self.len()
752        unsafe { self.sift_up(0, old_len) };
753    }
754
    /// Consumes the `BinaryHeap` and returns a vector in sorted
    /// (ascending) order.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    ///
    /// let mut heap = BinaryHeap::from([1, 2, 4, 5, 7]);
    /// heap.push(6);
    /// heap.push(3);
    ///
    /// let vec = heap.into_sorted_vec();
    /// assert_eq!(vec, [1, 2, 3, 4, 5, 6, 7]);
    /// ```
    #[must_use = "`self` will be dropped if the result is not used"]
    #[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
    pub fn into_sorted_vec(mut self) -> Vec<T, A> {
        // In-place heapsort: repeatedly swap the maximum (index 0) to the end
        // of the shrinking unsorted prefix, then restore the heap property
        // over that prefix.
        let mut end = self.len();
        while end > 1 {
            end -= 1;
            // SAFETY: `end` goes from `self.len() - 1` to 1 (both included),
            //  so it's always a valid index to access.
            //  It is safe to access index 0 (i.e. `ptr`), because
            //  1 <= end < self.len(), which means self.len() >= 2.
            unsafe {
                let ptr = self.data.as_mut_ptr();
                ptr::swap(ptr, ptr.add(end));
            }
            // SAFETY: `end` goes from `self.len() - 1` to 1 (both included) so:
            //  0 < 1 <= end <= self.len() - 1 < self.len()
            //  Which means 0 < end and end < self.len().
            unsafe { self.sift_down_range(0, end) };
        }
        self.into_vec()
    }
793
794    // The implementations of sift_up and sift_down use unsafe blocks in
795    // order to move an element out of the vector (leaving behind a
796    // hole), shift along the others and move the removed element back into the
797    // vector at the final location of the hole.
798    // The `Hole` type is used to represent this, and make sure
799    // the hole is filled back at the end of its scope, even on panic.
800    // Using a hole reduces the constant factor compared to using swaps,
801    // which involves twice as many moves.
802
    /// Moves the element at `pos` up the heap (toward the root) while it is
    /// greater than its parent, restoring the max-heap property along the
    /// path from `start` to `pos`.
    ///
    /// # Safety
    ///
    /// The caller must guarantee that `pos < self.len()`.
    ///
    /// Returns the new position of the element.
    unsafe fn sift_up(&mut self, start: usize, pos: usize) -> usize {
        // Take out the value at `pos` and create a hole.
        // SAFETY: The caller guarantees that pos < self.len()
        let mut hole = unsafe { Hole::new(&mut self.data, pos) };

        while hole.pos() > start {
            let parent = (hole.pos() - 1) / 2;

            // SAFETY: hole.pos() > start >= 0, which means hole.pos() > 0
            //  and so hole.pos() - 1 can't underflow.
            //  This guarantees that parent < hole.pos() so
            //  it's a valid index and also != hole.pos().
            if hole.element() <= unsafe { hole.get(parent) } {
                // Parent is at least as large: the heap property already
                // holds here, so the element has found its place.
                break;
            }

            // SAFETY: Same as above
            unsafe { hole.move_to(parent) };
        }

        // `Hole`'s destructor fills the hole with the element at its final position.
        hole.pos()
    }
830
    /// Take an element at `pos` and move it down the heap,
    /// while its children are larger.
    ///
    /// Returns the new position of the element.
    ///
    /// # Safety
    ///
    /// The caller must guarantee that `pos < end <= self.len()`.
    unsafe fn sift_down_range(&mut self, pos: usize, end: usize) -> usize {
        // SAFETY: The caller guarantees that pos < end <= self.len().
        let mut hole = unsafe { Hole::new(&mut self.data, pos) };
        let mut child = 2 * hole.pos() + 1;

        // Loop invariant: child == 2 * hole.pos() + 1.
        // The loop body runs only while BOTH children exist (child + 1 < end).
        while child <= end.saturating_sub(2) {
            // compare with the greater of the two children
            // SAFETY: child < end - 1 < self.len() and
            //  child + 1 < end <= self.len(), so they're valid indexes.
            //  child == 2 * hole.pos() + 1 != hole.pos() and
            //  child + 1 == 2 * hole.pos() + 2 != hole.pos().
            // FIXME: 2 * hole.pos() + 1 or 2 * hole.pos() + 2 could overflow
            //  if T is a ZST
            child += unsafe { hole.get(child) <= hole.get(child + 1) } as usize;

            // if we are already in order, stop.
            // SAFETY: child is now either the old child or the old child+1
            //  We already proven that both are < self.len() and != hole.pos()
            if hole.element() >= unsafe { hole.get(child) } {
                return hole.pos();
            }

            // SAFETY: same as above.
            unsafe { hole.move_to(child) };
            child = 2 * hole.pos() + 1;
        }

        // Handle the case of a node with only a left child.
        // SAFETY: && short circuit, which means that in the
        //  second condition it's already true that child == end - 1 < self.len().
        if child == end - 1 && hole.element() < unsafe { hole.get(child) } {
            // SAFETY: child is already proven to be a valid index and
            //  child == 2 * hole.pos() + 1 != hole.pos().
            unsafe { hole.move_to(child) };
        }

        hole.pos()
    }
877
    /// Sifts the element at `pos` down within the whole heap.
    ///
    /// # Safety
    ///
    /// The caller must guarantee that `pos < self.len()`.
    unsafe fn sift_down(&mut self, pos: usize) -> usize {
        let len = self.len();
        // SAFETY: pos < len is guaranteed by the caller and
        //  obviously len = self.len() <= self.len().
        unsafe { self.sift_down_range(pos, len) }
    }
887
    /// Take an element at `pos` and move it all the way down the heap,
    /// then sift it up to its position.
    ///
    /// Note: This is faster when the element is known to be large / should
    /// be closer to the bottom.
    ///
    /// # Safety
    ///
    /// The caller must guarantee that `pos < self.len()`.
    unsafe fn sift_down_to_bottom(&mut self, mut pos: usize) {
        let end = self.len();
        let start = pos;

        // SAFETY: The caller guarantees that pos < self.len().
        let mut hole = unsafe { Hole::new(&mut self.data, pos) };
        let mut child = 2 * hole.pos() + 1;

        // Phase 1: unconditionally move the hole down along the larger-child
        // path, without comparing against the moved element. This saves
        // comparisons when the element belongs near the bottom.
        // Loop invariant: child == 2 * hole.pos() + 1.
        while child <= end.saturating_sub(2) {
            // SAFETY: child < end - 1 < self.len() and
            //  child + 1 < end <= self.len(), so they're valid indexes.
            //  child == 2 * hole.pos() + 1 != hole.pos() and
            //  child + 1 == 2 * hole.pos() + 2 != hole.pos().
            // FIXME: 2 * hole.pos() + 1 or 2 * hole.pos() + 2 could overflow
            //  if T is a ZST
            child += unsafe { hole.get(child) <= hole.get(child + 1) } as usize;

            // SAFETY: Same as above
            unsafe { hole.move_to(child) };
            child = 2 * hole.pos() + 1;
        }

        if child == end - 1 {
            // SAFETY: child == end - 1 < self.len(), so it's a valid index
            //  and child == 2 * hole.pos() + 1 != hole.pos().
            unsafe { hole.move_to(child) };
        }
        pos = hole.pos();
        drop(hole);

        // Phase 2: sift the element back up to its correct position.
        // SAFETY: pos is the position in the hole and was already proven
        //  to be a valid index.
        unsafe { self.sift_up(start, pos) };
    }
932
933    /// Rebuild assuming data[0..start] is still a proper heap.
934    fn rebuild_tail(&mut self, start: usize) {
935        if start == self.len() {
936            return;
937        }
938
939        let tail_len = self.len() - start;
940
941        #[inline(always)]
942        fn log2_fast(x: usize) -> usize {
943            (usize::BITS - x.leading_zeros() - 1) as usize
944        }
945
946        // `rebuild` takes O(self.len()) operations
947        // and about 2 * self.len() comparisons in the worst case
948        // while repeating `sift_up` takes O(tail_len * log(start)) operations
949        // and about 1 * tail_len * log_2(start) comparisons in the worst case,
950        // assuming start >= tail_len. For larger heaps, the crossover point
951        // no longer follows this reasoning and was determined empirically.
952        let better_to_rebuild = if start < tail_len {
953            true
954        } else if self.len() <= 2048 {
955            2 * self.len() < tail_len * log2_fast(start)
956        } else {
957            2 * self.len() < tail_len * 11
958        };
959
960        if better_to_rebuild {
961            self.rebuild();
962        } else {
963            for i in start..self.len() {
964                // SAFETY: The index `i` is always less than self.len().
965                unsafe { self.sift_up(0, i) };
966            }
967        }
968    }
969
970    fn rebuild(&mut self) {
971        let mut n = self.len() / 2;
972        while n > 0 {
973            n -= 1;
974            // SAFETY: n starts from self.len() / 2 and goes down to 0.
975            //  The only case when !(n < self.len()) is if
976            //  self.len() == 0, but it's ruled out by the loop condition.
977            unsafe { self.sift_down(n) };
978        }
979    }
980
    /// Moves all the elements of `other` into `self`, leaving `other` empty.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    ///
    /// let mut a = BinaryHeap::from([-10, 1, 2, 3, 3]);
    /// let mut b = BinaryHeap::from([-20, 5, 43]);
    ///
    /// a.append(&mut b);
    ///
    /// assert_eq!(a.into_sorted_vec(), [-20, -10, 1, 2, 3, 3, 5, 43]);
    /// assert!(b.is_empty());
    /// ```
    #[stable(feature = "binary_heap_append", since = "1.11.0")]
    pub fn append(&mut self, other: &mut Self) {
        // Always append the smaller heap onto the larger one, so that the
        // cheaper tail-rebuild heuristic below sees the smallest tail.
        if self.len() < other.len() {
            swap(self, other);
        }

        // Elements from `start` onward are the unordered tail to fix up.
        let start = self.data.len();

        self.data.append(&mut other.data);

        self.rebuild_tail(start);
    }
1010
    /// Clears the binary heap, returning an iterator over the removed elements
    /// in heap order. If the iterator is dropped before being fully consumed,
    /// it drops the remaining elements in heap order.
    ///
    /// The returned iterator keeps a mutable borrow on the heap to optimize
    /// its implementation.
    ///
    /// Note:
    /// * `.drain_sorted()` is *O*(*n* \* log(*n*)); much slower than `.drain()`.
    ///   You should use the latter for most cases.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// #![feature(binary_heap_drain_sorted)]
    /// use std::collections::BinaryHeap;
    ///
    /// let mut heap = BinaryHeap::from([1, 2, 3, 4, 5]);
    /// assert_eq!(heap.len(), 5);
    ///
    /// drop(heap.drain_sorted()); // removes all elements in heap order
    /// assert_eq!(heap.len(), 0);
    /// ```
    #[inline]
    #[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
    pub fn drain_sorted(&mut self) -> DrainSorted<'_, T, A> {
        // The iterator pops from `self` lazily; its Drop impl finishes the job.
        DrainSorted { inner: self }
    }
1041
    /// Retains only the elements specified by the predicate.
    ///
    /// In other words, remove all elements `e` for which `f(&e)` returns
    /// `false`. The elements are visited in unsorted (and unspecified) order.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    ///
    /// let mut heap = BinaryHeap::from([-10, -5, 1, 2, 4, 13]);
    ///
    /// heap.retain(|x| x % 2 == 0); // only keep even numbers
    ///
    /// assert_eq!(heap.into_sorted_vec(), [-10, 2, 4])
    /// ```
    #[stable(feature = "binary_heap_retain", since = "1.70.0")]
    pub fn retain<F>(&mut self, mut f: F)
    where
        F: FnMut(&T) -> bool,
    {
        // rebuild_start will be updated to the first touched element below, and the rebuild will
        // only be done for the tail. The guard also restores the heap property
        // if the predicate panics.
        let mut guard = RebuildOnDrop { rebuild_from: self.len(), heap: self };
        // Index of the element currently being visited by `Vec::retain`.
        let mut i = 0;

        guard.heap.data.retain(|e| {
            let keep = f(e);
            // Record the first removal position: everything from there on
            // shifts, so the heap property may be broken from that index.
            if !keep && i < guard.rebuild_from {
                guard.rebuild_from = i;
            }
            i += 1;
            keep
        });
    }
1079}
1080
impl<T, A: Allocator> BinaryHeap<T, A> {
    /// Returns an iterator visiting all values in the underlying vector, in
    /// arbitrary order.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let heap = BinaryHeap::from([1, 2, 3, 4]);
    ///
    /// // Print 1, 2, 3, 4 in arbitrary order
    /// for x in heap.iter() {
    ///     println!("{x}");
    /// }
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[cfg_attr(not(test), rustc_diagnostic_item = "binaryheap_iter")]
    pub fn iter(&self) -> Iter<'_, T> {
        Iter { iter: self.data.iter() }
    }

    /// Returns an iterator which retrieves elements in heap order.
    ///
    /// This method consumes the original heap.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// #![feature(binary_heap_into_iter_sorted)]
    /// use std::collections::BinaryHeap;
    /// let heap = BinaryHeap::from([1, 2, 3, 4, 5]);
    ///
    /// assert_eq!(heap.into_iter_sorted().take(2).collect::<Vec<_>>(), [5, 4]);
    /// ```
    #[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
    pub fn into_iter_sorted(self) -> IntoIterSorted<T, A> {
        IntoIterSorted { inner: self }
    }

    /// Returns the greatest item in the binary heap, or `None` if it is empty.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let mut heap = BinaryHeap::new();
    /// assert_eq!(heap.peek(), None);
    ///
    /// heap.push(1);
    /// heap.push(5);
    /// heap.push(2);
    /// assert_eq!(heap.peek(), Some(&5));
    ///
    /// ```
    ///
    /// # Time complexity
    ///
    /// Cost is *O*(1) in the worst case.
    #[must_use]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn peek(&self) -> Option<&T> {
        // The root of the heap (index 0) is the maximum element.
        self.data.get(0)
    }

    /// Returns the number of elements the binary heap can hold without reallocating.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let mut heap = BinaryHeap::with_capacity(100);
    /// assert!(heap.capacity() >= 100);
    /// heap.push(4);
    /// ```
    #[must_use]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn capacity(&self) -> usize {
        self.data.capacity()
    }

    /// Reserves the minimum capacity for at least `additional` elements more than
    /// the current length. Unlike [`reserve`], this will not
    /// deliberately over-allocate to speculatively avoid frequent allocations.
    /// After calling `reserve_exact`, capacity will be greater than or equal to
    /// `self.len() + additional`. Does nothing if the capacity is already
    /// sufficient.
    ///
    /// [`reserve`]: BinaryHeap::reserve
    ///
    /// # Panics
    ///
    /// Panics if the new capacity overflows [`usize`].
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let mut heap = BinaryHeap::new();
    /// heap.reserve_exact(100);
    /// assert!(heap.capacity() >= 100);
    /// heap.push(4);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn reserve_exact(&mut self, additional: usize) {
        self.data.reserve_exact(additional);
    }

    /// Reserves capacity for at least `additional` elements more than the
    /// current length. The allocator may reserve more space to speculatively
    /// avoid frequent allocations. After calling `reserve`,
    /// capacity will be greater than or equal to `self.len() + additional`.
    /// Does nothing if capacity is already sufficient.
    ///
    /// # Panics
    ///
    /// Panics if the new capacity overflows [`usize`].
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let mut heap = BinaryHeap::new();
    /// heap.reserve(100);
    /// assert!(heap.capacity() >= 100);
    /// heap.push(4);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn reserve(&mut self, additional: usize) {
        self.data.reserve(additional);
    }

    /// Tries to reserve the minimum capacity for at least `additional` elements
    /// more than the current length. Unlike [`try_reserve`], this will not
    /// deliberately over-allocate to speculatively avoid frequent allocations.
    /// After calling `try_reserve_exact`, capacity will be greater than or
    /// equal to `self.len() + additional` if it returns `Ok(())`.
    /// Does nothing if the capacity is already sufficient.
    ///
    /// Note that the allocator may give the collection more space than it
    /// requests. Therefore, capacity can not be relied upon to be precisely
    /// minimal. Prefer [`try_reserve`] if future insertions are expected.
    ///
    /// [`try_reserve`]: BinaryHeap::try_reserve
    ///
    /// # Errors
    ///
    /// If the capacity overflows, or the allocator reports a failure, then an error
    /// is returned.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// use std::collections::TryReserveError;
    ///
    /// fn find_max_slow(data: &[u32]) -> Result<Option<u32>, TryReserveError> {
    ///     let mut heap = BinaryHeap::new();
    ///
    ///     // Pre-reserve the memory, exiting if we can't
    ///     heap.try_reserve_exact(data.len())?;
    ///
    ///     // Now we know this can't OOM in the middle of our complex work
    ///     heap.extend(data.iter());
    ///
    ///     Ok(heap.pop())
    /// }
    /// # find_max_slow(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?");
    /// ```
    #[stable(feature = "try_reserve_2", since = "1.63.0")]
    pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), TryReserveError> {
        self.data.try_reserve_exact(additional)
    }

    /// Tries to reserve capacity for at least `additional` elements more than the
    /// current length. The allocator may reserve more space to speculatively
    /// avoid frequent allocations. After calling `try_reserve`, capacity will be
    /// greater than or equal to `self.len() + additional` if it returns
    /// `Ok(())`. Does nothing if capacity is already sufficient. This method
    /// preserves the contents even if an error occurs.
    ///
    /// # Errors
    ///
    /// If the capacity overflows, or the allocator reports a failure, then an error
    /// is returned.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// use std::collections::TryReserveError;
    ///
    /// fn find_max_slow(data: &[u32]) -> Result<Option<u32>, TryReserveError> {
    ///     let mut heap = BinaryHeap::new();
    ///
    ///     // Pre-reserve the memory, exiting if we can't
    ///     heap.try_reserve(data.len())?;
    ///
    ///     // Now we know this can't OOM in the middle of our complex work
    ///     heap.extend(data.iter());
    ///
    ///     Ok(heap.pop())
    /// }
    /// # find_max_slow(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?");
    /// ```
    #[stable(feature = "try_reserve_2", since = "1.63.0")]
    pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> {
        self.data.try_reserve(additional)
    }

    /// Discards as much additional capacity as possible.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let mut heap: BinaryHeap<i32> = BinaryHeap::with_capacity(100);
    ///
    /// assert!(heap.capacity() >= 100);
    /// heap.shrink_to_fit();
    /// assert!(heap.capacity() == 0);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn shrink_to_fit(&mut self) {
        self.data.shrink_to_fit();
    }

    /// Discards capacity with a lower bound.
    ///
    /// The capacity will remain at least as large as both the length
    /// and the supplied value.
    ///
    /// If the current capacity is less than the lower limit, this is a no-op.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let mut heap: BinaryHeap<i32> = BinaryHeap::with_capacity(100);
    ///
    /// assert!(heap.capacity() >= 100);
    /// heap.shrink_to(10);
    /// assert!(heap.capacity() >= 10);
    /// ```
    #[inline]
    #[stable(feature = "shrink_to", since = "1.56.0")]
    pub fn shrink_to(&mut self, min_capacity: usize) {
        self.data.shrink_to(min_capacity)
    }

    /// Returns a slice of all values in the underlying vector, in arbitrary
    /// order.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// use std::io::{self, Write};
    ///
    /// let heap = BinaryHeap::from([1, 2, 3, 4, 5, 6, 7]);
    ///
    /// io::sink().write(heap.as_slice()).unwrap();
    /// ```
    #[must_use]
    #[stable(feature = "binary_heap_as_slice", since = "1.80.0")]
    pub fn as_slice(&self) -> &[T] {
        self.data.as_slice()
    }

    /// Returns a mutable slice of all values in the underlying vector.
    ///
    /// # Safety
    ///
    /// The caller must ensure that the slice remains a max-heap, i.e. for all indices
    /// `0 < i < slice.len()`, `slice[(i - 1) / 2] >= slice[i]`, before the borrow ends
    /// and the binary heap is used.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// #![feature(binary_heap_as_mut_slice)]
    ///
    /// use std::collections::BinaryHeap;
    ///
    /// let mut heap = BinaryHeap::<u32>::from([1, 2, 3, 4, 5, 6, 7]);
    ///
    /// unsafe {
    ///     for value in heap.as_mut_slice() {
    ///         *value = (*value).saturating_mul(2);
    ///     }
    /// }
    /// ```
    #[must_use]
    #[unstable(feature = "binary_heap_as_mut_slice", issue = "154009")]
    pub unsafe fn as_mut_slice(&mut self) -> &mut [T] {
        self.data.as_mut_slice()
    }

    /// Consumes the `BinaryHeap` and returns the underlying vector
    /// in arbitrary order.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let heap = BinaryHeap::from([1, 2, 3, 4, 5, 6, 7]);
    /// let vec = heap.into_vec();
    ///
    /// // Will print in some order
    /// for x in vec {
    ///     println!("{x}");
    /// }
    /// ```
    #[must_use = "`self` will be dropped if the result is not used"]
    #[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
    pub fn into_vec(self) -> Vec<T, A> {
        // Delegates to the `From<BinaryHeap> for Vec` impl.
        self.into()
    }

    /// Returns a reference to the underlying allocator.
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn allocator(&self) -> &A {
        self.data.allocator()
    }

    /// Returns the length of the binary heap.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let heap = BinaryHeap::from([1, 3]);
    ///
    /// assert_eq!(heap.len(), 2);
    /// ```
    #[must_use]
    #[stable(feature = "rust1", since = "1.0.0")]
    #[rustc_confusables("length", "size")]
    pub fn len(&self) -> usize {
        self.data.len()
    }

    /// Checks if the binary heap is empty.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let mut heap = BinaryHeap::new();
    ///
    /// assert!(heap.is_empty());
    ///
    /// heap.push(3);
    /// heap.push(5);
    /// heap.push(1);
    ///
    /// assert!(!heap.is_empty());
    /// ```
    #[must_use]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Clears the binary heap, returning an iterator over the removed elements
    /// in arbitrary order. If the iterator is dropped before being fully
    /// consumed, it drops the remaining elements in arbitrary order.
    ///
    /// The returned iterator keeps a mutable borrow on the heap to optimize
    /// its implementation.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let mut heap = BinaryHeap::from([1, 3]);
    ///
    /// assert!(!heap.is_empty());
    ///
    /// for x in heap.drain() {
    ///     println!("{x}");
    /// }
    ///
    /// assert!(heap.is_empty());
    /// ```
    #[inline]
    #[stable(feature = "drain", since = "1.6.0")]
    pub fn drain(&mut self) -> Drain<'_, T, A> {
        Drain { iter: self.data.drain(..) }
    }

    /// Drops all items from the binary heap.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let mut heap = BinaryHeap::from([1, 3]);
    ///
    /// assert!(!heap.is_empty());
    ///
    /// heap.clear();
    ///
    /// assert!(heap.is_empty());
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn clear(&mut self) {
        // Dropping the `Drain` iterator removes and drops every element.
        self.drain();
    }
}
1521
/// Hole represents a hole in a slice i.e., an index without valid value
/// (because it was moved from or duplicated).
/// In drop, `Hole` will restore the slice by filling the hole
/// position with the value that was originally removed.
struct Hole<'a, T: 'a> {
    // The slice being sifted; `data[pos]` is logically uninitialized.
    data: &'a mut [T],
    // Copy of the removed element; `ManuallyDrop` prevents a double drop.
    elt: ManuallyDrop<T>,
    // Current index of the hole within `data`.
    pos: usize,
}
1531
impl<'a, T> Hole<'a, T> {
    /// Creates a new `Hole` at index `pos`.
    ///
    /// # Safety
    ///
    /// `pos` must be within the data slice, i.e. `pos < data.len()`.
    #[inline]
    unsafe fn new(data: &'a mut [T], pos: usize) -> Self {
        debug_assert!(pos < data.len());
        // SAFE: pos should be inside the slice
        let elt = unsafe { ptr::read(data.get_unchecked(pos)) };
        Hole { data, elt: ManuallyDrop::new(elt), pos }
    }

    /// Returns the current position of the hole.
    #[inline]
    fn pos(&self) -> usize {
        self.pos
    }

    /// Returns a reference to the element removed.
    #[inline]
    fn element(&self) -> &T {
        &self.elt
    }

    /// Returns a reference to the element at `index`.
    ///
    /// # Safety
    ///
    /// `index` must be within the data slice and not equal to `pos`
    /// (the slot at `pos` holds no valid value).
    #[inline]
    unsafe fn get(&self, index: usize) -> &T {
        debug_assert!(index != self.pos);
        debug_assert!(index < self.data.len());
        unsafe { self.data.get_unchecked(index) }
    }

    /// Move hole to new location
    ///
    /// # Safety
    ///
    /// `index` must be within the data slice and not equal to `pos`.
    #[inline]
    unsafe fn move_to(&mut self, index: usize) {
        debug_assert!(index != self.pos);
        debug_assert!(index < self.data.len());
        unsafe {
            let ptr = self.data.as_mut_ptr();
            let index_ptr: *const _ = ptr.add(index);
            let hole_ptr = ptr.add(self.pos);
            // index != pos, so the one-element copy cannot overlap.
            ptr::copy_nonoverlapping(index_ptr, hole_ptr, 1);
        }
        self.pos = index;
    }
}
1581
impl<T> Drop for Hole<'_, T> {
    #[inline]
    fn drop(&mut self) {
        // fill the hole again
        // SAFETY: `pos` is kept in bounds by `new` and `move_to`, and the
        // slot at `pos` is the only logically-uninitialized one, so writing
        // the saved element back restores the slice to a fully valid state.
        unsafe {
            let pos = self.pos;
            ptr::copy_nonoverlapping(&*self.elt, self.data.get_unchecked_mut(pos), 1);
        }
    }
}
1592
/// An iterator over the elements of a `BinaryHeap`.
///
/// This `struct` is created by [`BinaryHeap::iter()`]. See its
/// documentation for more.
///
/// [`iter`]: BinaryHeap::iter
#[must_use = "iterators are lazy and do nothing unless consumed"]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Iter<'a, T: 'a> {
    // Thin wrapper over the slice iterator of the backing vector;
    // yields elements in arbitrary (storage) order.
    iter: slice::Iter<'a, T>,
}
1604
#[stable(feature = "default_iters_sequel", since = "1.82.0")]
impl<T> Default for Iter<'_, T> {
    /// Creates an empty `binary_heap::Iter`.
    ///
    /// ```
    /// # use std::collections::binary_heap;
    /// let iter: binary_heap::Iter<'_, u8> = Default::default();
    /// assert_eq!(iter.len(), 0);
    /// ```
    fn default() -> Self {
        // An empty slice iterator; yields nothing.
        Iter { iter: Default::default() }
    }
}
1618
#[stable(feature = "collection_debug", since = "1.17.0")]
impl<T: fmt::Debug> fmt::Debug for Iter<'_, T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Shows only the elements not yet yielded.
        f.debug_tuple("Iter").field(&self.iter.as_slice()).finish()
    }
}
1625
// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
// (a derive would incorrectly require `T: Clone` for the borrowed iterator).
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Clone for Iter<'_, T> {
    fn clone(&self) -> Self {
        Iter { iter: self.iter.clone() }
    }
}
1633
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> Iterator for Iter<'a, T> {
    type Item = &'a T;

    // All methods delegate to the inner slice iterator.
    #[inline]
    fn next(&mut self) -> Option<&'a T> {
        self.iter.next()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }

    #[inline]
    fn last(self) -> Option<&'a T> {
        self.iter.last()
    }
}
1653
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
    #[inline]
    fn next_back(&mut self) -> Option<&'a T> {
        // Delegates to the slice iterator; order is still arbitrary.
        self.iter.next_back()
    }
}
1661
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ExactSizeIterator for Iter<'_, T> {
    fn is_empty(&self) -> bool {
        // Exact length is known from the underlying slice iterator.
        self.iter.is_empty()
    }
}
1668
// Slice iterators keep returning `None` once exhausted, so `Iter` is fused.
#[stable(feature = "fused", since = "1.26.0")]
impl<T> FusedIterator for Iter<'_, T> {}
1671
/// An owning iterator over the elements of a `BinaryHeap`.
///
/// This `struct` is created by [`BinaryHeap::into_iter()`]
/// (provided by the [`IntoIterator`] trait). See its documentation for more.
///
/// [`into_iter`]: BinaryHeap::into_iter
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Clone)]
pub struct IntoIter<
    T,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    // Consuming iterator over the backing vector; yields in arbitrary order.
    iter: vec::IntoIter<T, A>,
}
1686
impl<T, A: Allocator> IntoIter<T, A> {
    /// Returns a reference to the underlying allocator.
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn allocator(&self) -> &A {
        self.iter.allocator()
    }
}
1694
#[stable(feature = "collection_debug", since = "1.17.0")]
impl<T: fmt::Debug, A: Allocator> fmt::Debug for IntoIter<T, A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Shows only the elements not yet yielded.
        f.debug_tuple("IntoIter").field(&self.iter.as_slice()).finish()
    }
}
1701
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> Iterator for IntoIter<T, A> {
    type Item = T;

    // Delegates to the vector's consuming iterator.
    #[inline]
    fn next(&mut self) -> Option<T> {
        self.iter.next()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }
}
1716
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
    #[inline]
    fn next_back(&mut self) -> Option<T> {
        // Delegates to the vector's iterator; order is still arbitrary.
        self.iter.next_back()
    }
}
1724
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> ExactSizeIterator for IntoIter<T, A> {
    fn is_empty(&self) -> bool {
        // Exact length is known from the vector's iterator.
        self.iter.is_empty()
    }
}
1731
// `vec::IntoIter` keeps returning `None` once exhausted, so this is fused.
#[stable(feature = "fused", since = "1.26.0")]
impl<T, A: Allocator> FusedIterator for IntoIter<T, A> {}
1734
#[doc(hidden)]
#[unstable(issue = "none", feature = "trusted_fused")]
// SAFETY: delegates to `vec::IntoIter`, which upholds the fused contract.
unsafe impl<T, A: Allocator> TrustedFused for IntoIter<T, A> {}
1738
#[stable(feature = "default_iters", since = "1.70.0")]
impl<T> Default for IntoIter<T> {
    /// Creates an empty `binary_heap::IntoIter`.
    ///
    /// ```
    /// # use std::collections::binary_heap;
    /// let iter: binary_heap::IntoIter<u8> = Default::default();
    /// assert_eq!(iter.len(), 0);
    /// ```
    fn default() -> Self {
        // An empty vector iterator; yields nothing.
        IntoIter { iter: Default::default() }
    }
}
1752
// In addition to the SAFETY invariants of the following three unsafe traits
// also refer to the vec::in_place_collect module documentation to get an overview
#[unstable(issue = "none", feature = "inplace_iteration")]
#[doc(hidden)]
unsafe impl<T, A: Allocator> SourceIter for IntoIter<T, A> {
    type Source = IntoIter<T, A>;

    #[inline]
    unsafe fn as_inner(&mut self) -> &mut Self::Source {
        // This iterator is its own source: the in-place collect machinery can
        // reach the backing `vec::IntoIter` through `self.iter`.
        self
    }
}
1765
#[unstable(issue = "none", feature = "inplace_iteration")]
#[doc(hidden)]
unsafe impl<I, A: Allocator> InPlaceIterable for IntoIter<I, A> {
    // 1:1 — presumably each element read corresponds to one element written;
    // see the `vec::in_place_collect` module docs for the exact contract.
    const EXPAND_BY: Option<NonZero<usize>> = NonZero::new(1);
    const MERGE_BY: Option<NonZero<usize>> = NonZero::new(1);
}
1772
#[cfg(not(test))]
unsafe impl<I> AsVecIntoIter for IntoIter<I> {
    type Item = I;

    fn as_into_iter(&mut self) -> &mut vec::IntoIter<Self::Item> {
        // Exposes the wrapped vector iterator so specialized in-place collects
        // can operate on it directly.
        &mut self.iter
    }
}
1781
/// A consuming iterator that moves elements out of a `BinaryHeap` in
/// sorted (heap) order, greatest first.
///
/// This `struct` is created by [`BinaryHeap::into_iter_sorted()`]. See its
/// documentation for more.
#[must_use = "iterators are lazy and do nothing unless consumed"]
#[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
#[derive(Clone, Debug)]
pub struct IntoIterSorted<
    T,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    // Owns the heap; iteration repeatedly `pop`s it.
    inner: BinaryHeap<T, A>,
}
1791
impl<T, A: Allocator> IntoIterSorted<T, A> {
    /// Returns a reference to the underlying allocator.
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn allocator(&self) -> &A {
        // Delegates to the owned heap.
        self.inner.allocator()
    }
}
1799
1800#[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
1801impl<T: Ord, A: Allocator> Iterator for IntoIterSorted<T, A> {
1802    type Item = T;
1803
1804    #[inline]
1805    fn next(&mut self) -> Option<T> {
1806        self.inner.pop()
1807    }
1808
1809    #[inline]
1810    fn size_hint(&self) -> (usize, Option<usize>) {
1811        let exact = self.inner.len();
1812        (exact, Some(exact))
1813    }
1814}
1815
// `size_hint` is exact (`(len, Some(len))`), so the default `len` is correct.
#[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
impl<T: Ord, A: Allocator> ExactSizeIterator for IntoIterSorted<T, A> {}

// Once the heap is empty, `pop` keeps returning `None`, so iteration is fused.
#[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
impl<T: Ord, A: Allocator> FusedIterator for IntoIterSorted<T, A> {}

// SAFETY: `size_hint` above reports the exact number of remaining elements.
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T: Ord, A: Allocator> TrustedLen for IntoIterSorted<T, A> {}
1824
/// A draining iterator over the elements of a `BinaryHeap`.
///
/// This `struct` is created by [`BinaryHeap::drain()`]. See its
/// documentation for more.
///
/// [`drain`]: BinaryHeap::drain
#[stable(feature = "drain", since = "1.6.0")]
#[derive(Debug)]
pub struct Drain<
    'a,
    T: 'a,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    // Drains the heap's backing vector; elements come out in the heap's
    // internal (arbitrary) order, not sorted order.
    iter: vec::Drain<'a, T, A>,
}
1840
impl<T, A: Allocator> Drain<'_, T, A> {
    /// Returns a reference to the underlying allocator.
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn allocator(&self) -> &A {
        // Delegates to the inner `vec::Drain`.
        self.iter.allocator()
    }
}
1848
#[stable(feature = "drain", since = "1.6.0")]
impl<T, A: Allocator> Iterator for Drain<'_, T, A> {
    type Item = T;

    // Both methods forward to the inner `vec::Drain`; no heap ordering is
    // maintained while draining.
    #[inline]
    fn next(&mut self) -> Option<T> {
        self.iter.next()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }
}
1863
#[stable(feature = "drain", since = "1.6.0")]
impl<T, A: Allocator> DoubleEndedIterator for Drain<'_, T, A> {
    #[inline]
    fn next_back(&mut self) -> Option<T> {
        // Forwarded to `vec::Drain`: takes from the back of the drained range.
        self.iter.next_back()
    }
}

#[stable(feature = "drain", since = "1.6.0")]
impl<T, A: Allocator> ExactSizeIterator for Drain<'_, T, A> {
    fn is_empty(&self) -> bool {
        // Cheaper than the default `len() == 0` round-trip through `size_hint`.
        self.iter.is_empty()
    }
}

// `vec::Drain` is fused, and all iteration forwards to it.
#[stable(feature = "fused", since = "1.26.0")]
impl<T, A: Allocator> FusedIterator for Drain<'_, T, A> {}
1881
/// A draining iterator over the elements of a `BinaryHeap`, yielded in
/// heap order, greatest first.
///
/// This `struct` is created by [`BinaryHeap::drain_sorted()`]. See its
/// documentation for more.
///
/// [`drain_sorted`]: BinaryHeap::drain_sorted
#[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
#[derive(Debug)]
pub struct DrainSorted<
    'a,
    T: Ord,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    // Exclusive borrow of the heap: elements are removed via `pop`, and the
    // `Drop` impl empties whatever the caller did not consume.
    inner: &'a mut BinaryHeap<T, A>,
}
1897
impl<'a, T: Ord, A: Allocator> DrainSorted<'a, T, A> {
    /// Returns a reference to the underlying allocator.
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn allocator(&self) -> &A {
        // Delegates to the borrowed heap.
        self.inner.allocator()
    }
}
1905
#[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
impl<'a, T: Ord, A: Allocator> Drop for DrainSorted<'a, T, A> {
    /// Removes heap elements in heap order.
    fn drop(&mut self) {
        // Panic guard: if dropping an element panics, the guard's own `Drop`
        // still pops (and drops) every remaining element, so the heap is left
        // empty rather than partially drained.
        struct DropGuard<'r, 'a, T: Ord, A: Allocator>(&'r mut DrainSorted<'a, T, A>);

        impl<'r, 'a, T: Ord, A: Allocator> Drop for DropGuard<'r, 'a, T, A> {
            fn drop(&mut self) {
                while self.0.inner.pop().is_some() {}
            }
        }

        // On the normal (non-panicking) path the guard is forgotten each
        // iteration, so its `Drop` never runs; it only fires while unwinding
        // out of `drop(item)`.
        while let Some(item) = self.inner.pop() {
            let guard = DropGuard(self);
            drop(item);
            mem::forget(guard);
        }
    }
}
1925
1926#[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
1927impl<T: Ord, A: Allocator> Iterator for DrainSorted<'_, T, A> {
1928    type Item = T;
1929
1930    #[inline]
1931    fn next(&mut self) -> Option<T> {
1932        self.inner.pop()
1933    }
1934
1935    #[inline]
1936    fn size_hint(&self) -> (usize, Option<usize>) {
1937        let exact = self.inner.len();
1938        (exact, Some(exact))
1939    }
1940}
1941
// `size_hint` is exact (`(len, Some(len))`), so the default `len` is correct.
#[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
impl<T: Ord, A: Allocator> ExactSizeIterator for DrainSorted<'_, T, A> {}

// Once the heap is empty, `pop` keeps returning `None`, so iteration is fused.
#[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
impl<T: Ord, A: Allocator> FusedIterator for DrainSorted<'_, T, A> {}

// SAFETY: `size_hint` above reports the exact number of remaining elements.
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T: Ord, A: Allocator> TrustedLen for DrainSorted<'_, T, A> {}
1950
#[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
impl<T: Ord, A: Allocator> From<Vec<T, A>> for BinaryHeap<T, A> {
    /// Converts a `Vec<T>` into a `BinaryHeap<T>`.
    ///
    /// This conversion happens in-place, and has *O*(*n*) time complexity.
    fn from(vec: Vec<T, A>) -> BinaryHeap<T, A> {
        // Adopt the vector as backing storage, then restore the heap
        // invariant over all of its elements.
        let mut heap = BinaryHeap { data: vec };
        heap.rebuild();
        heap
    }
}
1962
#[stable(feature = "std_collections_from_array", since = "1.56.0")]
impl<T: Ord, const N: usize> From<[T; N]> for BinaryHeap<T> {
    /// Converts a `[T; N]` into a `BinaryHeap<T>`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    ///
    /// let mut h1 = BinaryHeap::from([1, 4, 2, 3]);
    /// let mut h2: BinaryHeap<_> = [1, 4, 2, 3].into();
    /// while let Some((a, b)) = h1.pop().zip(h2.pop()) {
    ///     assert_eq!(a, b);
    /// }
    /// ```
    fn from(arr: [T; N]) -> Self {
        // Delegates to the `FromIterator` impl, which collects and heapifies.
        Self::from_iter(arr)
    }
}
1978
#[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
impl<T, A: Allocator> From<BinaryHeap<T, A>> for Vec<T, A> {
    /// Converts a `BinaryHeap<T>` into a `Vec<T>`.
    ///
    /// This conversion requires no data movement or allocation, and has
    /// constant time complexity.
    fn from(heap: BinaryHeap<T, A>) -> Vec<T, A> {
        // The backing vector is handed over as-is, in the heap's internal
        // order — it is not sorted.
        heap.data
    }
}
1989
1990#[stable(feature = "rust1", since = "1.0.0")]
1991impl<T: Ord> FromIterator<T> for BinaryHeap<T> {
1992    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> BinaryHeap<T> {
1993        BinaryHeap::from(iter.into_iter().collect::<Vec<_>>())
1994    }
1995}
1996
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> IntoIterator for BinaryHeap<T, A> {
    type Item = T;
    type IntoIter = IntoIter<T, A>;

    /// Creates a consuming iterator, that is, one that moves each value out of
    /// the binary heap in arbitrary order. The binary heap cannot be used
    /// after calling this.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let heap = BinaryHeap::from([1, 2, 3, 4]);
    ///
    /// // Print 1, 2, 3, 4 in arbitrary order
    /// for x in heap.into_iter() {
    ///     // x has type i32, not &i32
    ///     println!("{x}");
    /// }
    /// ```
    fn into_iter(self) -> IntoIter<T, A> {
        // Wraps the backing vector's consuming iterator; no reordering occurs.
        IntoIter { iter: self.data.into_iter() }
    }
}
2024
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T, A: Allocator> IntoIterator for &'a BinaryHeap<T, A> {
    type Item = &'a T;
    type IntoIter = Iter<'a, T>;

    /// Borrowing iteration: yields `&T` in the heap's arbitrary internal order.
    fn into_iter(self) -> Iter<'a, T> {
        self.iter()
    }
}
2034
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Ord, A: Allocator> Extend<T> for BinaryHeap<T, A> {
    #[inline]
    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
        // Append in bulk to the backing vector; the guard's `Drop` then
        // restores the heap invariant for everything from `rebuild_from`
        // onward — including when `iter` panics mid-way.
        let guard = RebuildOnDrop { rebuild_from: self.len(), heap: self };
        guard.heap.data.extend(iter);
    }

    #[inline]
    fn extend_one(&mut self, item: T) {
        // A single element is inserted via `push`, which maintains the
        // invariant directly — no bulk rebuild needed.
        self.push(item);
    }

    #[inline]
    fn extend_reserve(&mut self, additional: usize) {
        self.reserve(additional);
    }
}
2053
#[stable(feature = "extend_ref", since = "1.2.0")]
impl<'a, T: 'a + Ord + Copy, A: Allocator> Extend<&'a T> for BinaryHeap<T, A> {
    fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
        // `T: Copy`, so cloning out of the references is a plain copy;
        // delegate to the by-value `Extend` impl.
        self.extend(iter.into_iter().cloned());
    }

    #[inline]
    fn extend_one(&mut self, &item: &'a T) {
        // Destructure the reference (possible because `T: Copy`) and push.
        self.push(item);
    }

    #[inline]
    fn extend_reserve(&mut self, additional: usize) {
        self.reserve(additional);
    }
}